The openEO R-Client is currently available on the openEO github repository. It can be installed via remotes::install_github(). To have the most up-to-date functionality select the develop version.
# Install the development version of the openEO R-client from GitHub,
# then check which version got installed.
remotes::install_github(
  repo = "Open-EO/openeo-r-client",
  ref = "develop",
  dependencies = TRUE
)
packageVersion("openeo")
Load the openEO R-Client library and some other useful libraries.
# Load the openEO client plus helper packages. Load order is kept as-is
# because later packages mask functions of earlier ones (e.g. dplyr
# masks stats::filter).
library(openeo)   # openEO backend client
library(stars)    # raster data cubes
library(sf)       # vector data / CRS handling
library(mapview)  # interactive map views
library(mapedit)  # interactive feature drawing
library(dplyr)    # data manipulation (also re-exports %>%)
library(ggplot2)  # static plotting
library(plotly)   # interactive plotting
First you can connect to the backend to explore the available collections (as seen in the Connections Pane) and the processes.
# Connect to the VITO openEO backend. Connecting is enough for discovery
# (collections/processes); processing requires a login further below.
host <- "https://openeo.vito.be"
con <- openeo::connect(host)
## Connected to service: https://openeo.vito.be/openeo/1.0
## Please check the terms of service (terms_of_service()) and the privacy policy (privacy_policy()). By further usage of this service, you acknowledge and agree to those terms and policies.
To do processing you need to log in. One option is OIDC (currently unavailable for the R-Client).
# OIDC login: fetch the identity providers offered by the backend and
# authenticate against the EGI provider.
prov_ls <- list_oidc_providers()
prov <- prov_ls$egi
openeo::login(
  login_type = "oidc",
  provider = prov,
  config = list(client_id = prov$id, secret = NA)
)
Or for now with the basic login. Providing user and password.
# Basic (username/password) login.
# NOTE(review): credentials are hard-coded for demo purposes only -- in a
# real script read them from environment variables or a keyring instead
# of committing them to source control.
openeo::login(user = "peter", password = "peter123", login_type = "basic")
## Login successful.
Once you're connected you can discover processes and collections. This gives you valuable metadata on collections and information on how to use the processes.
The collections appear in the Connections pane of R-Studio. You can also list them via list_collections() and retrieve some first info.
# Show the collection identifiers offered by the backend as a one-column
# data frame (easier to read than a long character vector).
as.data.frame(names(openeo::list_collections()))
## .
## 1 S1_GRD_SIGMA0_ASCENDING
## 2 S1_GRD_SIGMA0_DESCENDING
## 3 TERRASCOPE_S2_FAPAR_V2
## 4 TERRASCOPE_S2_NDVI_V2
## 5 TERRASCOPE_S2_LAI_V2
## 6 TERRASCOPE_S2_FCOVER_V2
## 7 TERRASCOPE_S2_TOC_V2
## 8 TERRASCOPE_S1_SLC_COHERENCE_V1
## 9 PROBAV_L3_S10_TOC_333M
## 10 PROBAV_L3_S5_TOC_100M
## 11 TERRASCOPE_S5P_L3_NO2_TD_V1
## 12 TERRASCOPE_S5P_L3_NO2_TM_V1
## 13 TERRASCOPE_S5P_L3_NO2_TY_V1
## 14 TERRASCOPE_S5P_L3_CO_TD_V1
## 15 TERRASCOPE_S5P_L3_CO_TM_V1
## 16 TERRASCOPE_S5P_L3_CO_TY_V1
## 17 COPERNICUS_30
## 18 COPERNICUS_90
## 19 TERRASCOPE_S2_RHOW_V1
## 20 TERRASCOPE_S2_SPM_V1
## 21 TERRASCOPE_S2_TUR_V1
## 22 BIOPAR_FAPAR_V1_GLOBAL
## 23 CGLS_FAPAR_V2_GLOBAL
## 24 CGLS_LAI_V2_GLOBAL
## 25 CGLS_LAI300_V1_GLOBAL
## 26 CGLS_NDVI300_V1_GLOBAL
## 27 CGLS_NDVI_V2_GLOBAL
## 28 CGLS_BA300_V1_GLOBAL
## 29 TERRASCOPE_S1_GAMMA0_V1
## 30 S2_FAPAR_V102_WEBMERCATOR2
## 31 PROBAV_L3_S10_TOC_NDVI_333M
## 32 S2_FAPAR_SCENECLASSIFICATION_V102_PYRAMID
## 33 SENTINEL1_GAMMA0_SENTINELHUB
## 34 SENTINEL1_GRD
## 35 SENTINEL2_L1C_SENTINELHUB
## 36 SENTINEL2_L2A_SENTINELHUB
## 37 AGERA5
## 38 LANDSAT8_L1C
## 39 LANDSAT8_L2
## 40 MODIS
## 41 SENTINEL3_SLSTR
## 42 TERRASCOPE_S2_CHLA_V1
To get more details you can use the collection_viewer() which opens the description of the collection in the Viewer pane
# Open the HTML description of the collection in the RStudio Viewer pane
# (emits a warning when run outside RStudio, see below).
collection_viewer("SENTINEL2_L1C_SENTINELHUB")
## Warning in htmlViewer(html): Cannot show a viewer panel. 'viewer' not available,
## maybe you are using this package not in RStudio.
To use the collection metadata for further coding you can use describe_collection().
# Fetch the collection metadata as an R object for programmatic use,
# e.g. to read the spatial extent (xmin, ymin, xmax, ymax in WGS84).
coll_meta <- openeo::describe_collection("SENTINEL2_L1C_SENTINELHUB")
coll_meta[["extent"]][["spatial"]] # could be used to draw a bbox of the collection
## [1] -180 -56 180 83
To get an overview of the available processes use list_processes()
# Show all process identifiers the backend supports as a one-column
# data frame.
as.data.frame(names(list_processes()))
## .
## 1 array_apply
## 2 arccos
## 3 arcosh
## 4 power
## 5 last
## 6 subtract
## 7 not
## 8 cosh
## 9 artanh
## 10 is_valid
## 11 first
## 12 median
## 13 eq
## 14 absolute
## 15 arctan2
## 16 array_labels
## 17 divide
## 18 is_nan
## 19 all
## 20 round
## 21 min
## 22 any
## 23 gte
## 24 cos
## 25 between
## 26 count
## 27 xor
## 28 extrema
## 29 and
## 30 variance
## 31 or
## 32 sum
## 33 sin
## 34 sinh
## 35 product
## 36 exp
## 37 neq
## 38 sd
## 39 sort
## 40 arsinh
## 41 int
## 42 order
## 43 array_find
## 44 if
## 45 sqrt
## 46 add
## 47 e
## 48 tan
## 49 mean
## 50 array_filter
## 51 mod
## 52 multiply
## 53 lte
## 54 pi
## 55 ceil
## 56 tanh
## 57 arctan
## 58 floor
## 59 array_element
## 60 clip
## 61 sgn
## 62 quantiles
## 63 arcsin
## 64 rearrange
## 65 array_contains
## 66 is_nodata
## 67 gt
## 68 ln
## 69 log
## 70 max
## 71 lt
## 72 load_collection
## 73 load_disk_data
## 74 apply_neighborhood
## 75 apply_dimension
## 76 save_result
## 77 apply
## 78 reduce_dimension
## 79 add_dimension
## 80 rename_labels
## 81 aggregate_temporal
## 82 aggregate_temporal_period
## 83 aggregate_spatial
## 84 mask
## 85 mask_polygon
## 86 filter_temporal
## 87 filter_bbox
## 88 filter_bands
## 89 apply_kernel
## 90 ndvi
## 91 resample_spatial
## 92 resample_cube_spatial
## 93 merge_cubes
## 94 run_udf
## 95 linear_scale_range
## 96 constant
## 97 histogram
## 98 read_vector
## 99 get_geometries
## 100 raster_to_vector
## 101 sleep
## 102 atmospheric_correction
## 103 sar_backscatter
## 104 resolution_merge
## 105 discard_result
## 106 mask_scl_dilation
## 107 ard_normalized_radar_backscatter
## 108 normalized_difference
To get detailed information about a process you can again use the process_viewer()
# Show the rendered documentation of a single process in the Viewer pane.
process_viewer("atmospheric_correction")
## Warning in htmlViewer(html): Cannot show a viewer panel. 'viewer' not available,
## maybe you are using this package not in RStudio.
Or describe_process()
# Print a textual description of a process: summary, parameters, their
# descriptions and whether each parameter is optional.
describe_process("atmospheric_correction")
## Process: atmospheric_correction
## Summary:
## Description: iCor workflow test
## Returns: the corrected data as a data cube
##
## Parameter
## 1 data
## 2 method
## 3 elevation_model
## 4 missionId
## 5 sza
## 6 vza
## 7 raa
## 8 gnd
## 9 aot
## 10 cwv
## 11 appendDebugBands
## Description
## 1 Data cube containing multi-spectral optical top of atmosphere reflectances to be corrected.
## 2 The atmospheric correction method to use. To get reproducible results, you have to set a specific method.\n\nSet to `null` to allow the back-end to choose, which will improve portability, but reduce reproducibility as you *may* get different results if you run the processes multiple times.
## 3 The digital elevation model to use, leave empty to allow the back-end to make a suitable choice.
## 4 non-standard mission Id, currently defaults to sentinel2
## 5 non-standard if set, overrides sun zenith angle values [deg]
## 6 non-standard if set, overrides sensor zenith angle values [deg]
## 7 non-standard if set, overrides rel. azimuth angle values [deg]
## 8 non-standard if set, overrides ground elevation [km]
## 9 non-standard if set, overrides aerosol optical thickness [], usually 0.1..0.2
## 10 non-standard if set, overrides water vapor [], usually 0..7
## 11 non-standard if set to 1, saves debug bands
## Optional
## 1 FALSE
## 2 TRUE
## 3 TRUE
## 4 TRUE
## 5 TRUE
## 6 TRUE
## 7 TRUE
## 8 TRUE
## 9 TRUE
## 10 TRUE
## 11 TRUE
First define an area of interest interactively (either a point or area)
# Area of interest. Interactive alternative:
#   pnts <- mapedit::drawFeatures()
# Static point alternative:
#   pnts <- data.frame(x = 13.09884, y = 47.82496)
#   pnts <- st_as_sf(pnts, coords = c("x", "y"), crs = st_crs(4326))
# Static area (used here): a small bounding box in WGS84.
pnts <- st_bbox(
  c(xmin = 13.09741, ymin = 47.82417, xmax = 13.10026, ymax = 47.82565),
  crs = st_crs(4326)
)
mapview(pnts)
# Convert the bbox into the named list layout that load_collection()
# expects for its spatial_extent argument.
bbox <- st_bbox(pnts)
bbox <- list(
  west = bbox[["xmin"]],
  east = bbox[["xmax"]],
  south = bbox[["ymin"]],
  north = bbox[["ymax"]]
)
Define the collection (data set), time range and bands.
# Query parameters: collection id, a full-year temporal extent, and the
# bands required by the atmospheric correction plus red/NIR for NDVI.
collection <- "SENTINEL2_L1C_SENTINELHUB"
time_range <- list(
  "2018-01-01T00:00:00.000Z",
  "2018-12-31T00:00:00.000Z"
)
bands <- c(
  "B04", "B08", "CLP", "B09", "B8A", "B11",
  "sunAzimuthAngles", "sunZenithAngles", "viewAzimuthMean", "viewZenithMean"
)
# bands <- c("B04", "B08")
Start building the processing chain. First, load the available processes.
# Build the process collection: p$<process_name>() creates process-graph
# nodes; nothing is executed locally or on the backend yet.
p = processes()
# names(p)
Load the collection.
# Entry node of the process graph: lazily load the Sentinel-2 L1C cube,
# restricted to the bbox, the year 2018 and the selected bands.
data <- p$load_collection(
  id = collection,
  spatial_extent = bbox,
  temporal_extent = time_range,
  bands = bands
)
Atmospheric correction
# Atmospheric correction (top-of-atmosphere -> bottom-of-atmosphere
# reflectances) using the SMAC method; "iCor" is this backend's alternative.
boa_corr = p$atmospheric_correction(data = data, method = "smac") # or use "iCor"
Calculate NDVI. Reduce the “bands” dimension with the well known NDVI formula.
# Collapse the "bands" dimension into NDVI = (NIR - red) / (NIR + red).
# Band order follows the `bands` vector above: B04 (red) is index 1 and
# B08 (NIR) is index 2.
ndvi_calc <- p$reduce_dimension(
  data = boa_corr,
  dimension = "bands",
  reducer = function(data, context) {
    band_red <- data[1]
    band_nir <- data[2]
    (band_nir - band_red) / (band_nir + band_red)
  }
)
Aggregate to temporal periods
# process_viewer("aggregate_temporal")
# Monthly aggregation windows for 2018. Note the irregular first start
# (Jan 2nd) and last end (Dec 30th) -- kept exactly as in the original
# request. Each interval is a c(start, end) pair; the labels for the
# output time dimension are the interval start dates.
interval_starts <- c("2018-01-02", sprintf("2018-%02d-01", 2:11))
interval_ends <- c(sprintf("2018-%02d-01", 2:11), "2018-12-30")
intervals <- unname(Map(c, interval_starts, interval_ends))
labels <- interval_starts
# Median-aggregate the NDVI time series into the monthly intervals; the
# resulting time dimension is labelled with each interval's start date.
ndvi_mnth <- p$aggregate_temporal(
  data = ndvi_calc,
  intervals = intervals,
  reducer = function(data, context) {
    p$median(data)
  },
  labels = labels,
  dimension = "t"
)
Save the result.
# Terminal node of the graph: ask the backend to serialise the result as
# NetCDF when the graph is eventually executed.
result = p$save_result(data = ndvi_mnth, format="NetCDF")
No processing has happened so far. Only the workflow/process graph has been defined.
# Materialise the lazily-built workflow as a Graph object; still no
# computation has happened on the backend.
process_graph = as(result, "Graph")
Synchronous call: the result is computed directly and downloaded to a local file.
# Output path for the downloaded NetCDF. Use a path relative to the
# working directory instead of a user-specific absolute path
# ("/home/pzellner@eurac.edu/...") so the script is portable; create the
# target directory if it does not exist yet.
out_name <- file.path("data", "r_ndvi_mnth_int.nc")
dir.create(dirname(out_name), showWarnings = FALSE, recursive = TRUE)
# Synchronous execution: the backend computes the full graph and streams
# the NetCDF file back. Time the round trip.
a <- Sys.time()
compute_result(result,
               format = "netCDF",
               output_file = out_name,
               con = con)
b <- Sys.time() - a
b
# Load the downloaded NetCDF into R as a stars object. With no 'var'
# specified read_ncdf() picks the first variable (band_0, see the
# console output below).
ndvi = read_ncdf(out_name)
## no 'var' specified, using band_0
## other available variables:
## spatial_ref, t, x, y
## Warning in .get_nc_projection(meta$attribute, rep_var, all_coord_var): No
## projection information found in nc file.
# check which projection your data has and assign it
#system(paste0("gdalinfo ", out_name))
# NOTE(review): the NetCDF comes back without projection metadata (see
# warning above), so the CRS is assigned manually. EPSG:32633 (UTM 33N)
# is assumed for this AOI -- verify (e.g. via gdalinfo) if the AOI or
# backend changes.
st_crs(ndvi) = st_crs(32633)
# look at the time dimension
stars::st_get_dimension_values(ndvi, "t")
## [1] "2018-01-02 UTC" "2018-02-01 UTC" "2018-03-01 UTC" "2018-04-01 UTC"
## [5] "2018-05-01 UTC" "2018-06-01 UTC" "2018-07-01 UTC" "2018-08-01 UTC"
## [9] "2018-09-01 UTC" "2018-10-01 UTC" "2018-11-01 UTC"
plot some time slices
# Colour breaks from 0 to 1 in steps of 0.1, shared by all layers so the
# monthly NDVI maps are directly comparable.
brks <- seq(0, 1, by = 0.1)
# Render every second time slice and stack the resulting map layers.
slice_layers <- lapply(c(1, 3, 5, 7, 9, 11), function(idx) {
  mapview(slice(ndvi, "t", idx), at = brks)
})
Reduce(`+`, slice_layers)
get a point for plotting a pixel timeseries
# Pixel of interest for the time-series plot. Interactive alternative:
#   pixel <- mapedit::drawFeatures()
# Static assignment: a point inside the AOI as an sf object in WGS84.
pixel <- st_as_sf(
  data.frame(x = 13.09884, y = 47.82496),
  coords = c("x", "y"),
  crs = st_crs(4326)
)
mapview(pixel) + mapview(st_bbox(ndvi))
subset to a point and plot a pixel time series
# Reproject the point into the cube's CRS, then subset the cube to the
# single pixel it covers.
pixel <- st_transform(x = pixel, crs = st_crs(ndvi))
ndvi_ts <- ndvi[pixel]
# Assemble a plain data frame of NDVI values against interval start dates.
ndvi_ts_df <- data.frame(
  value = as.vector(pull(ndvi_ts)),
  dates = as.Date(st_get_dimension_values(ndvi_ts, "t"))
)
# Static ggplot of the time series, then an interactive plotly version.
plot_ts <- ggplot(ndvi_ts_df, aes(x = dates, y = value)) +
  geom_line() +
  geom_point()
plot_ts_plotly <- plotly::ggplotly(plot_ts)
plot_ts_plotly